home
***
CD-ROM
|
disk
|
FTP
|
other
***
search
/
Trading on the Edge
/
Trading On The Edge - CD-ROM Toolkit (Wayzata Technology)(2031)(1994).bin
/
pc
/
mac_file
/
vendor_d
/
neuralwa
/
nw2v50
/
modnn.ind
< prev
next >
Wrap
File List
|
1993-08-23
|
12KB
|
520 lines
inst4.1
!****************************************************************
!* *
!* Modular Neural Network Generator *
!* *
!* &In Number of Input nodes *
!* &Hd1 Number of Hidden PEs in Local Expert *
!* &Out Number of Output nodes *
!* GNNO Number of Local Experts *
!* GNNH Number of Hidden PEs in Gating network *
!* *
!****************************************************************
! Modular Hierarchical Neural Network
!
! *** Validate dialog parameters; @Err aborts generation ***
?&In 1 !at least one input PE?
>bge CheckHd1
@Err "Modular Neural Network MUST have at least one input PE"
:CheckHd1
?GNNO 2 !at least two local experts?
>bge CheckOut
@Err "Modular Neural Network MUST have at least two local experts"
:CheckOut
?BPAa 0 !check for auto-associative
>beq Hetero1
=&Out &In !auto-associative: #outputs forced to #inputs
>br OutOK
:Hetero1 !hetero-associative
?&Out 1 !at least one output PE?
>bge OutOK
@Err "Modular Neural Network MUST have at least one output PE"
:OutOK
@LdCS "modnn" !standard control strategy
!Load fixed learn-rate schedules for DBD and EDBD, and pick the
!instrument (graph) list file that matches the learning rule.
?BPLf "Delta-Bar-Delta" !DBD?
>bne NotDBD1 !Branch to next test if not
@LdLR "dbd" !Load default dbd schedule
=GrLF "mnndbd" !instrument list file
>br LRS1 !Continue
:NotDBD1
?BPLf "Ext DBD" !EDBD
>bne NotEDBD1 !Branch if not
@LdLR "edbd" !Load default edbd schedule
=GrLF "mnnedbd" !instrument list file
>br LRS1
:NotEDBD1
@LdLR "backprop" !Load default backprop schedule
=GrLF "mnnbackp" !instrument list file
:LRS1
=netn "Untitled" !default network title
=DLnF 0 !learn re-display off
=DRcF 0 !recall re-display off
! *** Build the Input Layer ***
@LLdf !load default layer to menu area
=LDln "In" !layer name
=Lpes &In !copy # of input PEs from menu
=Ltrn "Linear" !buffer (inputs pass through unchanged)
?BPGN 0 !Gaussian Init?
>beq GN1 !No
=Lnse "Gaussian" !Yes
:GN1
=x 100 !place to put layer on screen
=y 50
#Incl "stdnwgtf.iif" !standard # weight fields
@LAdd !add the input layer
! *** Build the Projection Layer if requested ***
?BPPL 0 !Projection layer requested?
>beq ProjL1 !No
#Incl "proj_lyr.iif" !builds the optional projection layer
:ProjL1
=n6 LayN !Effective "Input layer" (last layer added)
=n5 Lpes !#processing elements in that layer
! Try to calculate reasonable spacing. Store in n7
! n7 = 100 + 50 * max(&Hd1, &Out)
=n7 50 !Layer spacing
=n0 &Hd1
?&Hd1 &Out !n0 = max(&Hd1, &Out)
>bgt HdGTOut
=n0 &Out
:HdGTOut
*n7 n0 !n7 = 50 * n0
+n7 100 !n7 = 100 + 50 * n0
! Build Gating network Hidden layer (optional; skipped when GNNH < 1)
=n2 n5 !Num PEs in prev layer
=n3 n6 !"Hidden" layer number (defaults to input layer)
+y 50 !up higher on display
?GNNH 1
>blt NoGatHd1 !No gating network hidden layer requested
@LLdf !start with default layer again
=LDln "GateHid" !layer name
=Lpes GNNH !Number of PEs
=LDsh 1 !Make this triangular
?BPTf "DNNA" !Check for DNNA
>bne DNNAGH
=Lsum "DNNA" !DNNA summation
:DNNAGH
=Ltrn BPTf !Transfer function (from dialog)
=Llrn BPLf !Learning Rule (from dialog)
?BPGN 0 !Gaussian Init?
>beq GNGH !No
=Lnse "Gaussian" !Yes
:GNGH
=Lerf "standard"
!Use net global schedules for DBD and EDBD. Create
!a layer-local schedule for all other learning rules.
?BPLf "Delta-Bar-Delta" !DBD?
>beq LRSGH !If yes, use net global
?BPLf "Ext DBD" !EDBD?
>beq LRSGH !If yes, use net global
=BBLC GHLC !Gating hidden Learn Coef
@NLRS LDln !create schedule named after the layer
#Incl "bkpsched.iif" !corrupts n0,n1,f0,f1,f2
=Llrs LDln !Point layer at its own schedule
:LRSGH
=f0 n2 !# PEs in "input" layer
#Incl "wghtinit.iif" !computes init limit into f1
=LInH f1 !weight init high limit
=LInL 0.0
-LInL LInH !-ve of high init limit
=LFPO FPOf !F' Offset
#Incl "stdnwgtf.iif" !standard # weight fields
@LAdd !add the gating hidden layer
=n3 LayN !Remember layer number
:NoGatHd1
+y 50 !up higher on display
! Build Gating Network Output Layer (one PE per local expert)
@LLdf !start with default layer again
=LDln "GateOut" !layer name
=Lpes GNNO !Number of PEs = number of experts
=LDsh 1 !Make this triangular
=Ltrn "Linear" !Linear transfer; SoftMax competition (Lcmp) normalizes
=Lcmp "SoftMax" !SoftMax activation
=Llrn BPLf !Learning Rule
?BPGN 0 !Gaussian Init?
>beq GNGO !No
=Lnse "Gaussian" !Yes
:GNGO
!Use net global schedules for DBD and EDBD. Create
!a layer-local schedule for all other learning rules.
?BPLf "Delta-Bar-Delta" !DBD?
>beq LRSGO !If yes, use net global
?BPLf "Ext DBD" !EDBD?
>beq LRSGO !If yes, use net global
=BBLC GOLC !Gating Output Learn Coef
@NLRS LDln !create schedule named after the layer
#Incl "bkpsched.iif" !corrupts n0,n1,f0,f1,f2
=Llrs LDln !Point to it
:LRSGO
=f0 n2 !# PEs in previous layer
#Incl "wghtinit.iif" !computes init limit into f1
=LInH f1
=LInL 0.0
-LInL LInH !-ve of high init limit
=LFPO FPOf !F' Offset
=n2 Lpes !#processing elements (fan-in for next layer)
#Incl "stdnwgtf.iif" !standard # weight fields
@LAdd
!Now put in the Connections: bias and previous layer -> GateOut;
!when a gating hidden layer exists, also wire the input layer
!(optionally, if BPCP) and bias into it.
=n1 LayN !GateOut layer number
=cnwt 1.0 !connection weight
=cnty WVar !Variable
=cnsc WRel !Relative
=SPEl n1 !Destination layer
@SlPE
=NPEl -1 !Bias layer
@NrPE
@LCFl !full connections
=NPEl n3 !Source (previous) layer
@NrPE
@LCFl !full connections
?n3 n6 !Compare input and hidden layer nums
>beq NoGatHd2 !skip if same (no gating hidden layer)
=NPEl n6 !"Input" layer
@NrPE
?BPCP 0 !connect prior layers?
>beq GNoPrior !skip if not
@LCFl !full connections (input -> GateOut)
:GNoPrior
=SPEl n3 !Destination layer (gating hidden)
@SlPE
@LCFl !full connections (input -> gating hidden)
=NPEl -1 !Bias layer
@NrPE
@LCFl !full connections (bias -> gating hidden)
:NoGatHd2
! Build Local Expert Hidden layer. Loop head: one pass per local
! expert (n4 = 1..GNNO); each pass builds ExpHid<n4> (optional)
! and then falls through to the expert output layer below.
=n4 0 !Local expert index
:LocExp
+n4 1 !Increment counter
-y 50 !Down to hidden level
+x n7 !across (n7 = per-expert horizontal spacing)
=n2 n5 !Num PEs in prev layer
=n3 n6 !"Hidden" layer number (input layer if no hidden)
?&Hd1 1
>blt NoExpHd1 !No local expert hidden layer
@LLdf !start with default layer
=LDln "ExpHid" !layer name
+LDln n4 !index the name (ExpHid1, ExpHid2, ...)
=Lpes &Hd1 !Number of PEs
?BPTf "DNNA" !Check for DNNA
>bne DNNAEH
=Lsum "DNNA" !DNNA summation
:DNNAEH
=Ltrn BPTf !Transfer function
=Llrn BPLf !Learning Rule
?BPGN 0 !Gaussian Init?
>beq GNEH !No
=Lnse "Gaussian" !Yes
:GNEH
=Lerf "standard"
!Use net global schedules for DBD and EDBD. Create
!one shared "ExpHid" schedule (first pass only) for other rules.
?BPLf "Delta-Bar-Delta" !DBD?
>beq LRSEH !If yes, use net global
?BPLf "Ext DBD" !EDBD?
>beq LRSEH !If yes, use net global
?n4 1 !Only create sched first time
>bgt DontCrH
=BBLC H1LC !Hidden 1 learning coef.
@NLRS "ExpHid"
#Incl "bkpsched.iif" !corrupts n0,n1,f0,f1,f2 (quoted like every other #Incl)
:DontCrH
=Llrs "ExpHid" !Point to it
:LRSEH
=f0 n2 !# PEs in "input" layer
#Incl "wghtinit.iif" !computes init limit into f1
=LInH f1 !weight init high limit
=LInL 0.0
-LInL LInH !-ve of high init limit
=LFPO FPOf !F' Offset
#Incl "stdnwgtf.iif" !standard # weight fields
@LAdd
:NoExpHd1
+y 50 !up higher on display
! Build Local Expert Output Layer (ExpOut<n4>, &Out PEs)
@LLdf !start with default layer again
=LDln "ExpOut" !layer name
+LDln n4 !index the name (ExpOut1, ExpOut2, ...)
=Lpes &Out !Number of PEs
?BPTf "DNNA" !Check for DNNA
>bne DNNAEO
=Lsum "DNNA" !DNNA summation
=Ltrn "DNNA" !DNNA transfer
>br NoLinEO !Don't use Linear Output for DNNA
:DNNAEO
=Ltrn BPTf !Transfer function
?BPLi 0 !Linear Output?
>beq NoLinEO !Branch if not
=Ltrn "Linear"
:NoLinEO
=Llrn BPLf !Learning Rule
?BPGN 0 !Gaussian Init?
>beq GNEO !No
=Lnse "Gaussian" !Yes
:GNEO
!Use net global schedules for DBD and EDBD. Create
!one shared "ExpOut" schedule (first pass only) for other rules.
?BPLf "Delta-Bar-Delta" !DBD?
>beq LRSEO !If yes, use net global
?BPLf "Ext DBD" !EDBD?
>beq LRSEO !If yes, use net global
?n4 1 !Only create sched first time
>bgt DontCrO
=BBLC OPLC !output learning coef.
@NLRS "ExpOut"
#Incl "bkpsched.iif" !corrupts n0,n1,f0,f1,f2 (quoted like every other #Incl)
:DontCrO
=Llrs "ExpOut" !Point to it
:LRSEO
=f0 n2 !# PEs in previous layer
#Incl "wghtinit.iif" !computes init limit into f1
=LInH f1 !weight init high limit
=LInL 0.0
-LInL LInH !-ve of high init limit
=LFPO FPOf !F' Offset
=n2 Lpes !#processing elements (fan-in for next layer)
#Incl "stdnwgtf.iif" !standard # weight fields
@LAdd
!Now put in the Connections for this expert: bias and input feed
!the hidden layer (if any); hidden (or input) and bias feed ExpOut.
=n1 LayN !ExpOut layer number
=cnwt 1.0 !connection weight
=cnty WVar !Variable
=cnsc WRel !Relative
?&Hd1 1
>blt NoExpHd2 !No local expert hidden layer
=n3 n1 !this expert's hidden layer number:
-n3 n4 !subtract off output layers
-n3 1 !subtract off gating output layer
=SPEl n3 !Destination layer
@SlPE
=NPEl -1 !Bias layer
@NrPE
@LCFl !full connections
=NPEl n6 !Source layer (input layer)
@NrPE
@LCFl !full connections
?BPCP 0 !connect prior layers?
>beq NoExpHd2 !skip if not
=SPEl n1 !Destination layer (ExpOut gets input too)
@SlPE
@LCFl !full connections
:NoExpHd2
=SPEl n1 !Destination layer
@SlPE
=NPEl n3 !Source layer (hidden, or input when no hidden)
@NrPE
@LCFl !full connections
=NPEl -1 !Bias layer
@NrPE
@LCFl !full connections
!Loop if there are more experts
?n4 GNNO
>blt LocExp !Do next local expert
! Now build gating layer: GNNO * (&Out + 1) PEs -- each expert
! contributes its &Out outputs plus one gate value.
@LLdf !start with default layer again
=x 100
+y 50
=LDln "Gate" !layer name
=Lpes &Out !Number of PEs...
+Lpes 1 !...= (&Out + 1)...
*Lpes GNNO !...* GNNO
=n5 Lpes !remember gate layer size
=LDsh 2 !Make this circular
=Lcmp "ModNNGate" !gating competition function
=Lerf "ModNNGate" !gating error function
#Incl "stdnwgtf.iif" !standard # weight fields
@LAdd
! Connect up gating network to gate layer (fixed 1.0 weights,
! corresponding PEs, so gate values pass through unchanged)
=SPEl LayN !Destination layer (gate)
=NPEl LayN
-NPEl GNNO !source = gating output layer,
-NPEl 1 !GNNO+1 layer numbers back
=cnsc WRel
=cnty WFix !fixed weight
=cnwt 1.0
@LCCr !Connect correspondingly
! Connect up local experts to gate layer, PE by PE
=SPEn GNNO !dest PE index: expert outputs follow the gate PEs
=n4 0 !Count # experts
:GateLoop
+n4 1
+NPEl 1 !Source layer: next expert output layer
=NPEn 0 !Start at the beginning
:InLoop
@NrPE !Select Source PE
@SlPE !Select Destination PE
@PCon !connect two PEs together
+NPEn 1 !next PE in source
+SPEn 1 !next PE in dest.
?NPEn &Out !Source PE off the end?
>blt InLoop !No
?n4 GNNO !all experts wired?
>blt GateLoop
! Now build output layer
@LLdf !start with default layer again
=x 100
+x n7
+y 60
=LDln "Output" !layer name
=Lpes &Out !Number of PEs
#Incl "stdnwgtf.iif" !standard # weight fields
@LAdd
! *** Connect the gating layer to the output layer: Setup ***
=SPEl LayN !destination layer
=NPEl LayN
-NPEl 1 !source layer (the gate layer)
=SPEn 0 !first output PE
=NPEn GNNO !skip first GNNO gate PEs (presumably the gate values)
=cnwt 1.0 !set weight to 1.0
=cnty WFix !fixed weight
=cnsc WRel !relative connections
! *** Connect the gate layer to the output layer; the destination
! *** index wraps every &Out PEs so each output PE receives the
! *** matching output from every expert's block.
:OutCn
@SlPE !select next PE in output layer
@NrPE !select next PE in gate layer
@PCon !connect two PEs together
+SPEn 1 !next PE in output layer
?SPEn &Out !past the end?
>blt OutPEOK
=SPEn 0 !back to start of output layer
:OutPEOK
+NPEn 1 !next PE in the gate layer
?NPEn n5 !done with layer? (n5 = gate layer size)
>blt OutCn !no, keep going
! Check-pointing and DNNA-specific limits
=LnPr 0 !no pruning
?BPTf "DNNA"
>bne DNNA3 !skip unless DNNA transfer
=DLnN 1 !learn count for Check Points
=MWLF 1 !turn on weight limiting
=WtVl 0.99 !max value for weight limiting
:DNNA3
! *** Select Control Strategy & L/R Schedule ***
@LLsl !load super layer
=Lctl "modnn" !control strategy
!Use default fixed schedules for DBD and EDBD.
?BPLf "Delta-Bar-Delta" !DBD?
>bne NotDBD2 !Branch to next test if not
=Llrs "dbd" !Fixed schedule
>br LRSDef !Continue
:NotDBD2
?BPLf "Ext DBD" !EDBD
>bne NotEDBD2 !Branch if not
=Llrs "edbd" !Fixed schedule
>br LRSDef !continue
:NotEDBD2
=Llrs "backprop" !backprop L/R Schedule
:LRSDef
!
! I/O parameters:
!
=Llnn "train" !name of learn input
=Lrcn "test" !name of recall output
#Incl "stdioset.iif" !standard I/O settings
=Lax1 Epch !set epoch from dialog
?BPAa 0 !check for auto-associative
>beq Hetero2
+Lflg LAAs !auto-associative network
:Hetero2
! Target ranges for MinMax tables. Defaults are unipolar inputs
! [0,1] and outputs [0.2,0.8]; switched to bipolar below when the
! dialog options / transfer function require it.
=Lscl 0.0 !input low-value
?BPIn 0 !Bipolar Inputs?
>beq UnipInp !branch if unipolar
=Lscl -1.0 !input low-value
:UnipInp
=Loff 1.0 !input high-value
=Llow 0.2 !output low-value
=Lhgh 0.8 !output high-value
?BPTf "DNNA" !DNNA?
>beq UniPolar !Yes (Linear output not allowed)
?BPLi 0 !Linear Output?
>bne BiPolar !If yes, bipolar
?BPTf "Sigmoid" !Sigmoid?
>beq UniPolar !Yes
?BPTf "Perceptron" !Perceptron?
>beq UniPolar !Yes
?BPTf "StepFunction" !Step function?
>beq UniPolar !Yes
:BiPolar
=Lscl -1.0 !input low-value
=Llow -0.8 !output low-value
:UniPolar
@SVsl !save super layer back
!
! Miscellaneous globals
=Grph 1 !Activate instrument list
=jogl -.1 !lower limit for jog
=jogh +.1 !upper limit for jog
=seed 257 !starting seed number
@seed !set the seed
!
! Recall/Test modes
=RnTm 1
=RnRm 1
!set to learn for 50000 presentations
=RnLm 0
=LrnN 50000 !learn count
!
! Initialize the network
!
@Nini !initialize (randomizes weights within init limits)
@EOF